runtime.mspan.base (method)

82 uses

	runtime (current package)
		arena.go#L554: 	base := s.base()
		arena.go#L601: 	offset := addr - s.base()
		arena.go#L666: 	offset := addr - s.base()
		arena.go#L768: 	x := unsafe.Pointer(span.base())
		arena.go#L775: 		gcmarknewobject(span, span.base())
		arena.go#L780: 		racemalloc(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L785: 		msanmalloc(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L791: 		rzStart := span.base() + span.elemsize
		arena.go#L793: 		asanunpoison(unsafe.Pointer(span.base()), span.elemsize)
		arena.go#L805: 			profilealloc(mp, unsafe.Pointer(span.base()), userArenaChunkBytes)
		arena.go#L878: 	sysFault(unsafe.Pointer(s.base()), s.npages*pageSize)
		arena.go#L907: 		racefree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L945: 		racefree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L948: 		msanfree(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L951: 		asanpoison(unsafe.Pointer(s.base()), s.elemsize)
		arena.go#L1001: 		base = s.base()
		arena.go#L1062: 	s.limit = s.base() + s.elemsize
		arena.go#L1101: 	memclrNoHeapPointers(unsafe.Pointer(s.base()), s.elemsize)
		heapdump.go#L462: 				p := unsafe.Pointer(s.base() + uintptr(spf.special.offset))
		heapdump.go#L484: 		p := s.base()
		heapdump.go#L661: 			p := s.base() + uintptr(spp.special.offset)
		malloc.go#L939: 			return gclinkptr(uintptr(result)*s.elemsize + s.base())
		malloc.go#L975: 	v = gclinkptr(uintptr(freeIndex)*s.elemsize + s.base())
		malloc.go#L1545: 	x := unsafe.Pointer(span.base())
		mbitmap.go#L372: 	return span.base() + span.objIndex(addr)*span.elemsize
		mbitmap.go#L441: 	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		mbitmap.go#L579: 		return heapBitsSlice(span.base(), pageSize)
		mbitmap.go#L581: 	return heapBitsSlice(span.base(), span.npages*pageSize)
		mbitmap.go#L604: 	hbits := (*byte)(unsafe.Pointer(span.base() + spanSize - bitmapSize))
		mbitmap.go#L614: 	i := (addr - span.base()) / goarch.PtrSize / ptrBits
		mbitmap.go#L615: 	j := (addr - span.base()) / goarch.PtrSize % ptrBits
		mbitmap.go#L670: 	dst := unsafe.Pointer(span.base() + pageSize - pageSize/goarch.PtrSize/8)
		mbitmap.go#L671: 	o := (x - span.base()) / goarch.PtrSize
		mbitmap.go#L1149: 	return s.divideByElemSize(p - s.base())
		mbitmap.go#L1236: 		print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
		mbitmap.go#L1288: 	if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
		mbitmap.go#L1302: 	base = s.base() + objIndex*s.elemsize
		mcache.go#L263: 	s.limit = s.base() + size
		mcleanup.go#L152: 	offset := uintptr(unsafe.Pointer(c.ptr)) - span.base()
		mgcmark.go#L391: 					p := s.base() + uintptr(spf.special.offset)/s.elemsize*s.elemsize
		mgcmark.go#L1408: 		if b == s.base() {
		mgcmark.go#L1414: 			for oblet := b + maxObletBytes; oblet < s.base()+s.elemsize; oblet += maxObletBytes {
		mgcmark.go#L1424: 		n = s.base() + s.elemsize - b
		mgcmark.go#L1426: 		tp = s.typePointersOfUnchecked(s.base())
		mgcmark.go#L1561: 		obj := span.base() + idx*span.elemsize
		mgcmark.go#L1613: 		arena, pageIdx, pageMask := pageIndexOf(span.base())
		mgcmark.go#L1646: 	print(" s.base()=", hex(s.base()), " s.limit=", hex(s.limit), " s.spanclass=", s.spanclass, " s.elemsize=", s.elemsize, " s.state=")
		mgcmark.go#L1705: 	arena, pageIdx, pageMask := pageIndexOf(span.base())
		mgcsweep.go#L550: 		p := s.base() + objIndex*size
		mgcsweep.go#L556: 			endOffset := p - s.base() + size
		mgcsweep.go#L573: 					p := s.base() + uintptr(special.offset)
		mgcsweep.go#L589: 					p := s.base() + uintptr(special.offset)
		mgcsweep.go#L616: 				x := s.base() + i*s.elemsize
		mgcsweep.go#L816: 				sysFault(unsafe.Pointer(s.base()), size)
		mgcsweep.go#L849: 		addr := s.base() + i*s.elemsize
		mgcwork.go#L391: 			newb := (*workbuf)(unsafe.Pointer(s.base() + i))
		mheap.go#L498: func (s *mspan) base() uintptr {
		mheap.go#L654: 	if s == nil || b < s.base() {
		mheap.go#L729: 	if s == nil || s.state.get() != mSpanInUse || p < s.base() || p >= s.limit {
		mheap.go#L1429: 		s.limit = s.base() + uintptr(s.elemsize)*uintptr(s.nelems)
		mheap.go#L1456: 	h.setSpans(s.base(), npages, s)
		mheap.go#L1464: 		arena, pageIdx, pageMask := pageIndexOf(s.base())
		mheap.go#L1579: 			base := unsafe.Pointer(s.base())
		mheap.go#L1585: 			base := unsafe.Pointer(s.base())
		mheap.go#L1634: 			print("mheap.freeSpanLocked - span ", s, " ptr ", hex(s.base()), " allocCount ", s.allocCount, " sweepgen ", s.sweepgen, "/", h.sweepgen, "\n")
		mheap.go#L1640: 		arena, pageIdx, pageMask := pageIndexOf(s.base())
		mheap.go#L1669: 	h.pages.free(s.base(), s.npages)
		mheap.go#L1851: 	arenaPage := (s.base() / pageSize) % pagesPerArena
		mheap.go#L1852: 	ai := arenaIndex(s.base())
		mheap.go#L1859: 	arenaPage := (s.base() / pageSize) % pagesPerArena
		mheap.go#L1860: 	ai := arenaIndex(s.base())
		mheap.go#L1883: 	offset := uintptr(p) - span.base()
		mheap.go#L1921: 	offset := uintptr(p) - span.base()
		mheap.go#L2281: 	offset := uintptr(p) - span.base()
		mwbbuf.go#L253: 		arena, pageIdx, pageMask := pageIndexOf(span.base())
		signal_unix.go#L410: 			if s != nil && s.state.get() == mSpanManual && s.base() < sp && sp < s.limit {
		signal_unix.go#L411: 				gp := *(**g)(unsafe.Pointer(s.base()))
		stack.go#L212: 			x := gclinkptr(s.base() + i)
		stack.go#L415: 		v = unsafe.Pointer(s.base())
		stack.go#L505: 			println(hex(s.base()), v)
		traceallocfree.go#L69: 				x := s.base() + i*s.elemsize
		traceallocfree.go#L107: 	return traceArg(uint64(s.base())-trace.minPageHeapAddr) / pageSize
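
For context, a minimal sketch of the method being referenced and of the addressing pattern that recurs in the uses above. The startAddr field and the caller snippet are assumptions: only the signature at mheap.go#L498 appears in this listing, and the pattern is inferred from how callers combine base() with elemsize (e.g. malloc.go#L975, mbitmap.go#L372, mgcsweep.go#L550).

	// base returns the address of the first byte of the span's memory.
	// (Sketch: assumes mspan records its start address in a startAddr field.)
	func (s *mspan) base() uintptr {
		return s.startAddr
	}

	// Typical caller pattern seen in the uses above: object objIndex within a
	// span lives at the span's base plus objIndex times the element size.
	addr := s.base() + objIndex*s.elemsize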